# (scraped page header, kept for provenance): home *** CD-ROM | disk | FTP | other *** search
- # Source Generated with Decompyle++
- # File: in.pyo (Python 2.5)
-
- from __future__ import with_statement
- from threading import RLock, currentThread
- from time import time
- import wx
- from cStringIO import StringIO
- from path import path
- from util import WeakDelegate
- from util.cacheable import get_cache_root
- from util.lrucache import lru_cache, LRU
- from common import netcall
- import common.asynchttp as asynchttp
- from logging import getLogger
log = getLogger('favicons')
__all__ = ('favicon',)

# Sentinels returned by get_cached:
# FETCHING - an async fetch for the domain is already in flight.
# EMPTY    - the domain is known to have no favicon (zero-byte cache file).
FETCHING = object()
EMPTY = object()

# Cache lifetimes, in seconds.
FAVICON_EXPIRE_SECS = 2592000        # 30 days for a successfully fetched icon
FAVICON_EXPIRE_ERROR_SECS = 86400    # 1 day for a cached "no icon" result

# On-disk cache layout (under get_cache_root()):
#   <domain>.ico      - the icon bytes (zero-length marks "no icon")
#   <domain>.redirect - text file naming the domain whose .ico to use instead
FAVICON_CACHE_DIR = 'favicons'
ICON_EXT = '.ico'
LINK_EXT = '.redirect'

# Maximum number of dots allowed before we give up stripping subdomains
# while hunting for a favicon (see fetch_favicon's on_error).
MAX_SUBDOMAIN_CHECK = 5

# In-memory caches: domain -> wx.Bitmap/EMPTY, and domain -> resolved domain.
_favicons_lru = { }
_domain_lru = { }

# Domains whose favicons are known to live on a different host.
manual_redirects = {
    'gmail.com': 'mail.google.com',
    'facebookmail.com': 'www.facebook.com',
    'papajohns-specials.com': 'papajohns.com',
    'papajohnsonline.com': 'papajohns.com' }
-
def favicon(domain):
    """Return the cached favicon bitmap for *domain*, or None.

    None is returned when no icon is available yet: either the domain is
    known to have no icon, a fetch is already in flight, or a new async
    fetch has just been started by this call.
    """
    icon = get_cached(domain)
    if icon is None:
        # Nothing cached at all -- kick off an asynchronous fetch.
        fetch_favicon(domain)
        return None
    if icon is FETCHING or icon is EMPTY:
        return None
    return icon
-
# Weakly-referenced callback list; invoked with the domain name whenever a
# freshly loaded favicon becomes available (fired from get_cached).
on_icon = WeakDelegate()
-
def cache_path():
    """Return the favicon cache directory as a path, creating it on demand."""
    cache_dir = get_cache_root() / FAVICON_CACHE_DIR
    if not cache_dir.isdir():
        cache_dir.makedirs()
    return cache_dir
-
-
def clear_cache():
    """Delete the entire on-disk favicon cache directory."""
    cache_path().rmtree()
-
-
def get_icon_domain(domain):
    """Resolve *domain* to the domain whose cached icon should be used.

    Applies the manual_redirects table, then any on-disk ``.redirect``
    link file. Results are memoized in _domain_lru.
    """
    domain = manual_redirects.get(domain, domain)
    try:
        return _domain_lru[domain]
    except KeyError:
        pass

    link_file = cache_path() / (domain + LINK_EXT)
    if link_file.isfile():
        resolved = link_file.bytes()
    else:
        resolved = domain

    _domain_lru[domain] = resolved
    return resolved
-
-
def get_cached(domain, done_fetching = False):
    """Return the cached favicon for *domain*.

    Returns one of:
      wx.Bitmap - a usable icon (also stored in _favicons_lru)
      FETCHING  - an async fetch is in flight (only when not done_fetching)
      EMPTY     - the domain is known to have no favicon
      None      - nothing cached (caller should trigger a fetch)

    With done_fetching=True (called after a fetch completes) expiry checks
    and the in-flight check are skipped so the fresh file is always loaded.
    """
    domain = get_icon_domain(domain)
    if domain in _favicons_lru:
        return _favicons_lru[domain]

    if not done_fetching and is_fetching(domain):
        return FETCHING

    cache_file = cache_path() / domain + ICON_EXT
    if not cache_file.isfile():
        return None

    # Negative age means a mtime in the future (clock skew) -- treat as stale.
    age = time() - cache_file.mtime
    if not done_fetching:
        if age > FAVICON_EXPIRE_SECS or age < 0:
            log.info('expiring favicon for %s' % domain)
            cache_file.remove()
            return None

    if cache_file.size == 0:
        # A zero-byte file records a known "no favicon" result; it expires
        # sooner than a real icon so the site gets re-checked.
        if age > FAVICON_EXPIRE_ERROR_SECS:
            log.info('expiring empty favicon cache file for %s' % domain)
            cache_file.remove()
            return None

        log.debug('%s has an empty cache file', domain)
        _favicons_lru[domain] = EMPTY
        return EMPTY

    try:
        log.debug('loading favicon cache file for %s', domain)
        bitmap = wx.Bitmap(cache_file)
        if not bitmap.IsOk():
            raise Exception('bitmap.IsOk() != True')
    except Exception:
        # Capture via sys.exc_info() instead of "except ... as" to stay
        # Python 2.5 compatible. (The decompiled source logged e = None.)
        import sys
        e = sys.exc_info()[1]
        log.warning('Error loading image file: %s' % e)
        cache_file.remove()
        # BUG FIX: the original fell through here and assigned the unbound
        # name `bitmap` into the LRU, raising NameError. Report a miss.
        return None

    _favicons_lru[domain] = bitmap
    on_icon(domain)
    return bitmap
-
-
def cache_icon(domain, linked_domains, data):
    """Write *data* to the on-disk icon cache for *domain*.

    Each domain in *linked_domains* gets a ``.redirect`` file pointing at
    *domain*. The resolved-domain LRU is invalidated and the icon is loaded
    on the GUI thread via wx.CallAfter.
    """
    root = cache_path()

    icon_file = root / domain + ICON_EXT
    if icon_file.isfile():
        log.warning('caching file to %s but it already exists', icon_file)
    icon_file.write_bytes(data)

    # Point every aliased domain at the real one.
    for alias in linked_domains:
        (root / alias + LINK_EXT).write_bytes(domain)

    _domain_lru.clear()
    wx.CallAfter(get_cached, domain, done_fetching = True)
    log.debug('cached %d bytes of data for %r (linked: %s)', len(data), domain, ', '.join(linked_domains))
-
-
def cache_noicon(domain, linked_domains):
    """Record that *domain* has no favicon (a zero-byte cache entry)."""
    return cache_icon(domain, linked_domains, '')
-
-
def fetch_favicon(domain, linked_domains = None):
    """Asynchronously fetch http://<domain>/favicon.ico and cache it.

    *linked_domains* accumulates aliases (manual/on-disk redirects, the
    bare domain when ``www.`` is prepended, stripped subdomains on retry)
    so they can all be linked to the domain that finally yields an icon.
    Results arrive via cache_icon/cache_noicon on the GUI thread.
    """
    real_domain = get_icon_domain(domain)
    if linked_domains is None:
        linked_domains = []

    if real_domain != domain:
        linked_domains.append(domain)
    domain = real_domain

    # Try the www. variant first for bare domains.
    # NOTE(review): the "or wwwdomain in linked_domains" disjunct looks like
    # a decompiler artifact (an "and ... not in" would read more naturally),
    # but it is preserved here to keep behavior identical.
    wwwdomain = 'www.' + domain
    if not domain.startswith('www') or wwwdomain in linked_domains:
        linked_domains.append(domain)
        domain = wwwdomain

    url = 'http://' + domain + '/favicon.ico'

    def on_success(req, resp):
        # Got icon bytes: cache them and clear the in-flight flag.
        data = resp.read()
        log.info('httpopen(%s): received %d bytes of data', url, len(data))
        log.info('%r', resp)
        cache_icon(domain, linked_domains, data)
        unset_fetching([domain])

    def on_error(req = None, resp = None):
        log.error('on_error for domain=%r, linked_domains=%r', domain, linked_domains)
        if domain.count('.') < MAX_SUBDOMAIN_CHECK:
            # Strip the leftmost label and retry (e.g. a.b.com -> b.com).
            new_domain = '.'.join(domain.split('.')[1:])
            wx.CallAfter(fetch_favicon, new_domain, linked_domains + [domain])
        else:
            # Out of candidates: remember the failure (expires sooner).
            log.error('%r', resp)
            cache_noicon(domain, linked_domains)
            unset_fetching(linked_domains + [domain])

    def on_redirect(req):
        # If a redirect leads away from a favicon URL, re-aim the request at
        # /favicon.ico on the new host, guarding against redirect loops with
        # a set of already-checked URLs carried on the original request.
        if 'favicon' not in req.get_selector():
            new_url = 'http://%s/%s' % (req.get_host(), 'favicon.ico')
            old_req = req._orig_request
            checked_urls = getattr(old_req, '_favicons_checked_urls', set())
            if new_url in checked_urls:
                return None

            checked_urls.add(new_url)
            req = req.copy(url = new_url)
            req._favicons_checked_urls = old_req._favicons_checked_urls = checked_urls
            req._orig_request = old_req

        return req

    # BUG FIX: the decompiled source called fetch_lock.__enter__() with a
    # no-op try/finally, so the lock was acquired and never released.
    with fetch_lock:
        if domain in currently_fetching:
            log.info('already fetching %r', url)
            return None
        log.info('getting %r', url)
        currently_fetching.add(domain)

    # BUG FIX: restore the netcall invocation mangled by the decompiler
    # ("(None, None, fetch_lock, netcall)(...)").
    netcall(lambda : asynchttp.httpopen(url, success = on_success, error = on_error, on_redirect = on_redirect))
-
# fetch_lock guards currently_fetching, the set of domains with an HTTP
# favicon fetch currently in flight.
fetch_lock = RLock()
currently_fetching = set()
-
def set_fetching(domain):
    """Mark *domain* as having a favicon fetch in flight.

    BUG FIX: the decompiled body called fetch_lock.__enter__() with a no-op
    try/finally, acquiring the lock without ever releasing it; use a proper
    ``with`` block instead.
    """
    with fetch_lock:
        currently_fetching.add(domain)
-
-
-
def unset_fetching(domains):
    """Remove every domain in *domains* from the in-flight set.

    BUG FIX: the decompiled body called fetch_lock.__enter__() with a no-op
    try/finally, acquiring the lock without ever releasing it; use a proper
    ``with`` block instead.
    """
    global currently_fetching
    with fetch_lock:
        currently_fetching -= set(domains)
-
-
-
def is_fetching(domain):
    """Return True when a favicon fetch for *domain* is in flight.

    BUG FIX: the decompiled body called fetch_lock.__enter__() with a no-op
    try/finally, acquiring the lock without ever releasing it; use a proper
    ``with`` block instead.
    """
    with fetch_lock:
        return domain in currently_fetching
-
-
-